import datetime
import json
from json import JSONDecodeError

from mysql_conn.mysql_conn import MysqlConn
import pandas as pd
import numpy as np

from tools import locat

# MySQL connections on 10.3.1.154:3307 — two source schemas (clock_daily,
# smart_card) and one target schema (entity_data) written at the end.
# SECURITY NOTE(review): credentials are hard-coded in source — move them to
# environment variables or a config file kept out of version control.
ficus_clock_daily = MysqlConn('10.3.1.154', 'root', 'S0beydbaAt2o2o', 'clock_daily', port=3307)
ficus_smart_card = MysqlConn('10.3.1.154', 'root', 'S0beydbaAt2o2o', 'smart_card', port=3307)

ficus_entity = MysqlConn('10.3.1.154', 'root', 'S0beydbaAt2o2o', 'entity_data', port=3307)

# Reference dates: the job processes "yesterday's" records.
yesterday_date = datetime.datetime.now() - datetime.timedelta(days=1)
today_date = datetime.datetime.now()

# Yesterday as a compact YYYYMMDD integer — the format the clock-in table's
# `date` column uses.
yesterday_int = int(yesterday_date.strftime('%Y%m%d'))

yesterday = yesterday_date.date()
today = today_date.date()

print(yesterday_int, yesterday, today)


# Pull yesterday's health clock-in reports plus the uid -> user_number
# (student/staff ID) mapping.  `date` is stored as a YYYYMMDD integer, so
# interpolating yesterday_int here is safe (it is computed locally, not
# user-supplied).
sql_clock = f"SELECT uid, address, province, area, city, date, geo_api_info, fxyy FROM ncov_report_daily where date={yesterday_int}"
sql_user = f"SELECT uid, user_number from user_role"
clock_daily_data = ficus_clock_daily.read_mysql('ncov_report_daily', sql_clock)
role_data = ficus_clock_daily.read_mysql('user_role', sql_user)
# print(clock_daily_data)
# print(role_data)

# Attach user_number (student/staff ID) to each clock-in record, then
# de-duplicate per person (one clock-in per day is expected).
# NOTE(review): the merge is how='outer', so users present in user_role but
# with no clock-in yesterday are kept as all-NaN rows — presumably intentional
# so every person appears downstream; confirm with the owner.
clock_daily_data_lasted = pd.merge(clock_daily_data, role_data, on='uid', how='outer')
clock_daily_data_lasted.drop_duplicates(subset=['user_number'], keep='last', inplace=True)

# Rename user_number -> PERSONNO to match the smart-card tables' key column.
clock_daily_data_lasted = clock_daily_data_lasted.rename(columns={'user_number': 'PERSONNO'})

# Clean 'area' down to the district: keep the last whitespace-separated token.
# Fixed: the old per-row apply raised IndexError on an empty or
# whitespace-only address (``''.split()[-1]``) and misbehaved on an empty
# frame; the vectorized .str accessor yields NaN in those cases, keeps NaN as
# NaN, and is much faster.
clock_daily_data_lasted['area'] = clock_daily_data_lasted['area'].str.split().str[-1]

def _parse_position(raw):
    """Return (lng, lat) extracted from a geo_api_info JSON blob, or (None, None).

    `raw` is whatever the DB returned — a JSON string on good rows, None/NaN
    otherwise.  Non-string input (TypeError), malformed JSON (JSONDecodeError)
    and a missing 'position'/'lng'/'lat' key (KeyError) all yield (None, None).
    """
    try:
        position = json.loads(raw)['position']
        return position['lng'], position['lat']
    except (KeyError, TypeError, JSONDecodeError):
        return None, None


# Split geo_api_info into separate lng/lat columns for the locate step later.
_coords = [_parse_position(raw) for raw in clock_daily_data_lasted['geo_api_info']]
clock_daily_data_lasted['lng'] = [lng for lng, _ in _coords]
clock_daily_data_lasted['lat'] = [lat for _, lat in _coords]



# geo_api_info has been reduced to lng/lat, and uid served only as the merge
# key — neither is needed downstream, so drop both in one pass.
clock_daily_data_lasted = clock_daily_data_lasted.drop(columns=['geo_api_info', 'uid'])

# Smart-card sources: yesterday's transactions, active card accounts, and the
# card-machine -> location lookup.
# NOTE(review): the deal table name is hard-coded to one month
# (v_mid_deal_202012) — presumably it must roll over monthly; confirm whether
# something rewrites this or it is edited by hand.
sql_smart_deal = f"SELECT PERSONNO, MACHINEID, DEALDATETIME from v_mid_deal_202012 where DEALDATETIME > '{yesterday}' and DEALDATETIME < '{today}'"
# ENDCODE = 0 presumably means the account is still active — verify.
sql_smart_account = f"SELECT PERSONNO, PERSONNAME, SEX, STATUSNAME, DEPTNAME, IDNO from v_mid_cardaccount where ENDCODE = 0"
sql_smart_machine = f"SELECT MACHINEID, ORGNAME from v_mid_machine1209"
# NOTE(review): the first argument 'v_mid_deal_202012_g' does not match the
# table in sql_smart_deal ('v_mid_deal_202012') — looks like a typo; confirm
# what read_mysql does with this argument before changing it.
smart_data = ficus_smart_card.read_mysql('v_mid_deal_202012_g', sql_smart_deal)
smart_account = ficus_smart_card.read_mysql('v_mid_cardaccount', sql_smart_account)
smart_machine = ficus_smart_card.read_mysql('v_mid_machine1209', sql_smart_machine)

# print(smart_account)
# print(smart_machine)

# --- Clean the smart-card lookup tables -------------------------------------

# Machine -> campus area: keep the last '-'-separated segment of ORGNAME.
# Fixed: the old guard `if x.ORGNAME` let NaN through (NaN is truthy), so a
# NULL ORGNAME crashed with AttributeError.  Any non-string (None/NaN) now
# yields None, matching the original's empty-string result.
smart_machine['ORGNAME_AREA'] = smart_machine.apply(
    lambda x: x.ORGNAME.split('-')[-1] if isinstance(x.ORGNAME, str) and x.ORGNAME else None,
    axis=1)

# Latest transaction per person for the day.
# Fixed: sql_smart_deal has no ORDER BY, so keep='last' on raw row order did
# not actually guarantee "latest" — sort by DEALDATETIME before de-duplicating.
smart_data_lasted = (smart_data.sort_values('DEALDATETIME')
                     .drop_duplicates(subset=['PERSONNO'], keep='last'))

# De-duplicate accounts and keep only the top-level department segment.
# Fixed: guard NULL DEPTNAME, which previously raised AttributeError.
smart_account = smart_account.drop_duplicates(subset=['PERSONNO'], keep='last', inplace=False)
smart_account['DEPTNAME'] = smart_account.apply(
    lambda x: x.DEPTNAME.split('-')[0] if isinstance(x.DEPTNAME, str) else None,
    axis=1)

# Join machine info onto the transactions to get each deal's location name,
# then drop the machine id — only ORGNAME/ORGNAME_AREA are used downstream.
smart_data_lasted = (smart_data_lasted
                     .merge(smart_machine, on='MACHINEID', how='outer')
                     .drop(columns='MACHINEID'))

# Join account details onto the transactions, then join the result with the
# clock-in records — all keyed on PERSONNO, outer joins so people present on
# only one side are kept.
full_smart_data = smart_account.merge(smart_data_lasted, on='PERSONNO', how='outer')
full_data = full_smart_data.merge(clock_daily_data_lasted, on='PERSONNO', how='outer')

# Stamp every row with the report date and flag which source(s) it came from.
full_data['date'] = yesterday
# Vectorized notnull() replaces the old row-wise apply(): same booleans
# (True where the column has a value), much faster, and safe on an empty frame.
full_data['is_clock'] = full_data['area'].notnull()
full_data['is_deal'] = full_data['DEALDATETIME'].notnull()
full_data = full_data.drop_duplicates(subset=['PERSONNO'], keep='last', inplace=False)

# Keep only people seen in at least one source (clock-in OR card deal) —
# a positive mask instead of the old double-negated drop-by-index.
record_data = full_data[full_data.is_clock | full_data.is_deal].copy()  # .copy() avoids SettingWithCopyWarning below

# Resolve a final location from the GPS coords and/or the card machine's area.
record_data['locate'] = record_data.apply(lambda x: locat(x['lng'], x['lat'], x['ORGNAME_AREA']), axis=1)

# Upper-case column names to match the target table's schema.
record_data.columns = record_data.columns.map(str.upper)

print(record_data.head(100))
ficus_entity.xls_to_mysql(record_data, 'scupersonmove', if_exists='append')



# test_local_mysql.to_mysql(record_data, 'test_full_daily')

