# -*- coding: utf-8 -*-
import sys
sys.path.append("..")

from MysqlCon import MysqlCon
from pymongo import MongoClient
import pymysql
import time
import json
import demjson


class MysqlReader(object):
  """Helpers for paging rows out of MySQL, converting them into INSERT
  statements for ``dw_imei_tag`` and persisting failed statements to a
  local file so they can be replayed later.
  """


  # Append lines to d:\data\mysql\<fileName>.sql
  def insert_file(self, fileName, dataList):
    """Append every item of dataList as one line to the local .sql file.

    fileName -- base file name without extension
    dataList -- iterable; each element is written via str() plus newline
    Returns True even when writing fails (the error is only printed);
    callers treat this as best-effort logging.
    """
    f = "d:\\data\\mysql\\%s.sql" %(fileName)
    try:
      # Append mode ("a") so repeated runs accumulate instead of overwrite.
      with open(f, "a" ,encoding="utf-8") as file:
        # Loop variable renamed: the original `json` shadowed the json module.
        for item in dataList:
          file.write(str(item) + "\n")
    except Exception as e:
      print(e)

    return True

  # Execute a list of INSERT statements one by one
  def insert_batch(self, connect, cursor, insertList):
    """Execute each SQL string in insertList and commit once at the end.

    Statements that raise are collected and appended to error_sql2 for a
    later retry. Returns the total number of affected rows.
    """
    connect.ping(reconnect = True)
    insert_errors = []
    count = 0
    for insert_sql in insertList :
      try:
        count += cursor.execute(insert_sql)
      except Exception as e:
        print(e)
        insert_errors.append(insert_sql)
    # One commit for the whole batch; without it nothing is persisted.
    connect.commit()
    # Save the failing statements so they can be replayed later.
    if insert_errors:
      self.insert_file('error_sql2', insert_errors)
      print("@@@插入失败数：", len(insert_errors))

    return count

  # Multi-row insert via executemany
  def insert_many(self, connect, cursor, insert_sql, insert_data):
    """Insert many rows with one parameterized statement.

    insert_sql  -- statement with placeholders
    insert_data -- sequence of parameter tuples
    Rolls back on any error; returns True unconditionally.
    """
    connect.ping(reconnect = True)
    try:
      res = cursor.executemany(insert_sql, insert_data)
      print("res = ", res)
      # Commit is required or the rows are not persisted.
      connect.commit()
    except Exception as e:
      print(e)
      connect.rollback()

    return True

  # Count records
  def count_data(self, connect, cursor, query_sql):
    """Run a COUNT query and return the first row (a 1-tuple).

    Returns None when the query fails (the original raised
    UnboundLocalError here because queryResult was never assigned).
    """
    connect.ping(reconnect = True)
    queryResult = None  # default so a failed query cannot leave it unbound
    try:
      cursor.execute(query_sql)
      queryResult = cursor.fetchone()
    except Exception as e:
      print(e)

    return queryResult

  # Fetch rows
  def query_data(self, connect, cursor, query_sql):
    """Run a SELECT and return all rows.

    Returns an empty tuple when the query fails (the original raised
    UnboundLocalError in that case), so callers can safely iterate.
    """
    connect.ping(reconnect = True)
    queryResult = ()  # default so a failed query cannot leave it unbound
    try:
      cursor.execute(query_sql)
      queryResult = cursor.fetchall()
    except Exception as e:
      print(e)

    return queryResult

  # Convert raw rows into INSERT statements
  def convert_data(self, queryResult, dict):
    """Build one ``insert into dw_imei_tag ...`` statement per row.

    queryResult -- rows indexed as (id, _id, json_tags, order_id,
                   mission_id, sImei, iStatus, app_id) -- inferred from
                   the indexing below; confirm against the source table.
    dict        -- tag_id -> column-name mapping (parameter name shadows
                   the builtin ``dict``; kept for backward compatibility).
    Returns the list of SQL strings.
    """
    convertList = []
    for row in queryResult:
      rowData = {}
      rowData.setdefault('jg_id', row[1])
      rowData.setdefault('order_id', row[3])
      rowData.setdefault('mission_id', row[4])
      rowData.setdefault('imei_code', row[5])
      rowData.setdefault('status', row[6])
      rowData.setdefault('app_id', row[7])
      # Column 2 holds a (possibly non-strict) JSON list of tag objects;
      # demjson tolerates input that strict json.loads would reject.
      dataList = demjson.decode(row[2])
      for tag in dataList:
        try:
          tag_id = tag['tagid']
          tag_value = ','.join(tag['tag_value'])
          rowData.setdefault(dict[tag_id], tag_value)
        except Exception:
          # Unknown tag id or malformed entry: skip it silently
          # (the original assigned a dead `id = 0` here).
          pass
      fields = rowData.keys()
      values = rowData.values()
      cols = str(tuple(fields)).replace("'", "")
      # NOTE(review): str(tuple(values)) does not escape quotes inside the
      # values -- a value containing ' breaks the statement. Consider a
      # parameterized executemany instead of string-built SQL.
      sql = 'insert into dw_imei_tag ' + cols + ' values ' + str(tuple(values))
      convertList.append(sql)

    return convertList

  # Read back previously saved failing SQL statements
  def readErrorList(self, fileName):
    """Return the lines of d:\\data\\mysql\\<fileName>.sql (with newlines).

    Returns an empty list when the file cannot be read (the original
    raised UnboundLocalError because `lines` was only set inside try).
    """
    path = "d:\\data\\mysql\\%s.sql" %(fileName)
    lines = []  # default so a failed open cannot leave it unbound
    try:
      with open(path, "r" ,encoding="utf-8") as file:
        lines = file.readlines()
    except Exception as e:
      print(e)

    return lines

if __name__ == "__main__":
  #mysql
  odsdbConn = pymysql.connect(host='172.28.56.90', user='mysql', password='egSQ7HhxajHZjvdX', port=3306, database='odsdb',charset="utf8")
  odsdbCursor = odsdbConn.cursor()
  dwConn = pymysql.connect(host='172.28.56.90', user='mysql', password='egSQ7HhxajHZjvdX', port=3306, database='dw',charset="utf8")
  dwCursor = dwConn.cursor()

  #记录执行开始时间
  str_time = int(time.time())

  #实例化类
  obj = MysqlReader()
  #insertList = []

  count_sql = "select count(id) from jg_person_tags_07 "
  query_sql = "select * from jg_person_tags_07 limit %d, %d"
  dict = {'13': 'sex', '21': 'age_group', '32': 'career', '231': 'is_college_stu', '232': 'education', '16': 'marriage', '11': 'have_child', '22': 'have_car', '24': 'income_level', '23': 'consume_power', '1': 'mobile_model', 'c1': 'mobile_brand', '4': 'operator', '5': 'system', '27': 'consume_level', '26': 'consume_prefer', '28': 'purchase_prefer', '29': 'surf_purpose', '30': 'read_prefer', '31': 'habit', 'b115': 'is_chinese', '6': 'mobile_value', 'b5': 'client_value', 'b6': 'active_city', 'b9': 'workday_traffic', 'b10': 'holiday_traffic', 'b11': 'app_prefer', 'b14': 'estate_app', 'b15': 'estate_app_freq', 'b16': 'app_use_dura', 'b17': 'app_use_freq', 'b31': 'purchase_app', 'b49': 'decorate_demand', 'b50': 'car_demand', 'b51': 'educate_demand', 'b58': 'travel_app', 'b59': 'long_traffic_prefer', 'b61': 'travel_abroad', 'b4': 'resi_district_price', 'b40': 'mall_prefer', 'b8': 'active_comarea', 'b2': 'have_house', 'b3': 'car_brand', 'b62': 'in_province_travel', 'b63': 'out_province_travel', 'b1': 'have_pet', 'b111': 'rent_city', 'b112': 'rent_county', 'b113': 'rent_estate', 'b114': 'rent_frequent', 'b12': 'freq_use_app', 'b13': 'app_use_time', 'b18': 'bank_prefer', 'b19': 'invest_prefer', 'b20': 'loan_prefer', 'b21': 'lottery_prefer', 'b22': 'live_prefer', 'b23': 'game_prefer', 'b24': 'health_care', 'b25': 'entertainment', 'b26': 'food_category', 'b27': 'convenience', 'b28': 'social_active_level', 'b29': 'social_prefer', 'b30': 'educate_prefer', 'b32': 'offline_brand_prefer', 'b33': 'offline_shop_prefer', 'b34': 'offline_brand_type', 'b35': 'is_visit_mall', 'b36': 'is_visit_gym', 'b37': 'is_visiti_hotel', 'b39': 'hotel_duration', 'b41': 'hotel_prefer_poi', 'b42': 'ent_prefer_poi', 'b43': 'ent_str_prefer_poi', 'b44': 'food_prefer_type', 'b45': 'food_prefer_poi', 'b46': 'gas_prefer_poi', 'b47': 'car_repair_poi', 'b52': 'weekend_entertain', 'b53': 'weekent_purchase', 'b54': 'poi_prefer_cate', 'b55': 'poi_str_prefer_cate', 'b56': 'is_weekend_travel', 'b57': 
'travel_dest_type', 'b60': 'long_traffic_str_prefer', 'b64': 'travel_abroad_dest', 'b65': 'travel_month_prefer', 'b67': 'bustrip_city', 'b68': 'travel_prefer_poi', 'b69': 'travel_poi_duration', 'b7': 'resi_comarea', 'b70': 'insp_house_city', 'b71': 'insp_house_county', 'b72': 'insp_house_poi', 'b73': 'insp_house_freq', 'b38': 'hotel_brand_prefer', 'b66': 'hotel_level_prefer', 'b86': 'mall_prefer_lnglat', 'b85': 'insp_house_lnglat', '10': 'equip_cate', 'b48': 'buy_house_demand', 'b80': 'resi_address', 'b81': 'work_address', 'b82': 'resi_province', 'b83': 'resi_city', 'b84': 'resi_county'}

  totalCount = 2220375 #obj.count_data(odsdbConn, odsdbCursor, count_sql)[0]
  print("总记录数", totalCount)
  if totalCount == 0 :
    totalCount = 1;

  pageSize = 1000
  pages = int((totalCount-1)/pageSize) + 1
  row = 0

  for pageNum in range(pages):
    break
    #pageNum = 2220
    query_sql_tmp = query_sql %((pageNum * pageSize), pageSize)
    #print(query_sql_tmp)
    result = obj.query_data(odsdbConn, odsdbCursor, query_sql_tmp)
    #print("记录数：", len(result))
    convertList = obj.convert_data(result, dict)
    convertRow = len(convertList)
    print("转换数：", convertRow)
    #break
    #row += 1
    # 批量写入数据
    if convertRow > 0 :
      rest = obj.insert_batch(dwConn, dwCursor, convertList)
      print('插入成功数：', rest)
    #if row > 0 and (row % 100 == 0 or row == maxRow):

    #break
  errorList = obj.readErrorList("error_sql2")
  if len(errorList) > 0 :
    rest = obj.insert_batch(dwConn, dwCursor, errorList)
    print('插入成功数：', rest)

  odsdbConn.close()
  dwConn.close()

  end_time = int(time.time())
  print("程序执行时间（秒）：", end_time - str_time)
