import time
import geopandas as gpd
from shapely.geometry import Point
from geoalchemy2 import Geometry
from tools.lbs_utils import *

"""
不断读取并更新缺失数据
add_times:
    p1,p2,start,travel_type,search_hex,travel_time,distance
"""
# Root logging configuration: DEBUG level, timestamped records tagged with logger name.
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
logger.info("conn redis")

# Lazily-initialised PostgreSQL connection; populated by create_conn().
conn = None
# Redis client used to read the 'disp_missing_point' queue (from tools.lbs_utils).
model_redis = conn_redis()


def gen_search_hex(se):
    """Build the lookup key for one travel-time record.

    Concatenates the WKB hex of both endpoint geometries with the travel
    type and start period, so every (p1, p2, travel_type, start)
    combination maps to a unique string.

    :param se: row-like mapping with 'p1' and 'p2' (geometries exposing
        ``wkb_hex``), plus 'travel_type' and 'start' entries.
    :return: the concatenated key string.
    """
    # Named 'key' rather than 'hex' to avoid shadowing the builtin hex().
    key = str(se['p1'].wkb_hex) + str(se['p2'].wkb_hex) + str(se['travel_type']) + str(se['start'])
    return key


def _parse_point(coord):
    """Parse a 'lon,lat' string into a Point with coordinates rounded to 3 decimals."""
    lon, lat = coord.split(',')[0], coord.split(',')[1]
    return Point(round(float(lon), 3), round(float(lat), 3))


def batch_save_time(no_exists_dicts):
    """Fetch travel times for the missing point pairs and append them to the
    'travel_time' table.

    :param no_exists_dicts: list of dicts describing missing records; each
        carries at least a 'city' key.  # assumes schema — confirm with producer
    """
    logger.debug('no_exists_dicts: %s', len(no_exists_dicts))
    add_times = batch_transit_time(no_exists_dicts)
    logger.debug('add_times: %s', len(add_times))
    if len(add_times) == 0:
        return
    add_times = gpd.GeoDataFrame.from_dict(add_times)
    # NOTE(review): city columns are built from the INPUT list, which silently
    # assumes batch_transit_time returned exactly one row per input — confirm.
    add_times['city_id'] = [get_city_code(d['city'], 'city_name', 'id') for d in no_exists_dicts]
    add_times['city_cd'] = [get_city_code(d['city'], 'city_name', 'city_code') for d in no_exists_dicts]
    add_times['p1'] = add_times['p1'].apply(_parse_point)
    add_times['p2'] = add_times['p2'].apply(_parse_point)
    # search_hex must be computed while p1/p2 are still shapely Points
    # (gen_search_hex reads .wkb_hex), before the WKT-element conversion.
    add_times['search_hex'] = add_times.apply(gen_search_hex, axis=1)
    add_times['p1'] = add_times['p1'].apply(create_wkt_element)
    add_times['p2'] = add_times['p2'].apply(create_wkt_element)
    # 'period' is dropped so the database's own default/function maintains update_time.
    add_times.drop('period', axis=1, inplace=True)
    add_times.to_sql(name='travel_time', con=conn, if_exists='append', index=False,
                     dtype={'p1': Geometry('POINT'), 'p2': Geometry('POINT')})
    logger.info('add records: %s', len(no_exists_dicts))


def create_conn():
    """Lazily open the module-level PostgreSQL connection.

    Idempotent: does nothing when a connection object already exists.
    """
    global conn
    if conn is not None:
        return
    conn = conn_pgsql()


if __name__ == '__main__':
    # Open the PostgreSQL connection (module-level global `conn`).
    create_conn()
    while True:
        try:
            no_exists_points = model_redis.lrange('disp_missing_point', 0, -1)
        except Exception as e:
            no_exists_points = []
            logger.exception(e)
        if len(no_exists_points) > 0:
            # time.clock() was removed in Python 3.8; perf_counter() is the
            # documented replacement for timing a span of code.
            time0 = time.perf_counter()
            try:
                no_exists_points = list(map(bytes.decode, no_exists_points))
                no_exists_points = list(map(json.loads, no_exists_points))
            except Exception as e:
                logger.exception(e)
            logger.debug(no_exists_points)
            try:
                # NOTE(review): items pushed between lrange and delete are
                # lost here — consider LPOP/LTRIM for an atomic drain.
                model_redis.delete('disp_missing_point')
                logger.info('updated redis')
            except Exception as e:
                logger.exception(e)
            try:
                batch_save_time(no_exists_points)
            except Exception as e:
                logger.exception(e)
                # NOTE(review): create_conn() is a no-op once `conn` is set,
                # so this does not actually reconnect after a dropped
                # connection — confirm intended recovery path.
                create_conn()
            time1 = time.perf_counter()
            logger.debug('total take: %s', time1 - time0)