# -*- coding:utf-8 -*-
# Data-sync worker: pops task payloads from a Redis list and mirrors them
# into the Django `Task` model. Runs as a pool of worker processes; a flag
# file on disk (STOP_FLAG) signals graceful shutdown.
import multiprocessing
import time
import redis
import json
import argparse
import os
import re

import sys
import datetime
import json  # NOTE(review): duplicate — json is already imported above
# Bootstrap order matters: the project root must be on sys.path and the
# settings module configured BEFORE django.setup(), which in turn must run
# BEFORE importing any model (maintenance.models below).
root_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(root_path)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ElevatorAdmin.settings")
import django
django.setup()
from utils.logger import get_logger
from maintenance.models import Task

PROCESS_NUM = 16          # NOTE(review): unused here — the pool size comes from the -process CLI arg
LIST_NAME = 'task'        # Redis list the producers push task payloads onto
STOP_FLAG = '.sync_stop'  # presence of this file tells workers to exit
LOG_FILE = 'sync.log'     # NOTE(review): unused here — presumably consumed by utils.logger; confirm

logger = get_logger("data-sync")

def func():
    """Worker loop: pop JSON task payloads from the Redis queue and upsert
    them into the Task table until the stop-flag file appears (or Ctrl-C).

    Each payload is expected to be a JSON object with at least an "sn" key
    and an "update_time" ("%Y-%m-%d %H:%M:%S") key. A payload that fails
    mid-processing is pushed back onto the queue for retry.
    """
    sub_run = True
    r = redis.Redis(host='localhost', port=6379, db=3)
    while sub_run:
        # `data` stays None until a payload has been popped AND parsed;
        # only parsed payloads are eligible for push-back on error.
        # (Previously the sentinel was the string '{"init": 0}', which on an
        # early failure got re-encoded and pushed back as a garbage item
        # that could never be processed, looping forever.)
        data = None
        error = False
        try:
            pn = multiprocessing.current_process().name
            logger.info('===%s===' % pn)
            logger.info('[%s] queue len: %d' % (pn, r.llen(LIST_NAME)))
            # Blocking pop with a 5s timeout so the stop flag is polled
            # even when the queue is idle.
            ret = r.brpop(LIST_NAME, 5)
            if ret is None:
                if os.path.exists(STOP_FLAG):
                    logger.info('@@@stop')
                    sub_run = False
                continue
            data = json.loads(ret[1])
            logger.info(data)
            sn = data.get("sn", None)
            if not sn:
                # Payload without a serial number cannot be keyed — drop it.
                continue
            t = Task.objects.filter(sn=sn)
            if not t:
                Task.objects.create(**data)
            elif t.first().update_time < datetime.datetime.strptime(data.get("update_time"), "%Y-%m-%d %H:%M:%S"):
                # Only overwrite when the incoming payload is newer.
                t.update(**data)
        except KeyboardInterrupt:
            sub_run = False
            logger.info('@@@@@@stop by KeyboardInterrupt')
            error = True
        except Exception as e:
            # Log with traceback so failures are diagnosable from sync.log.
            logger.exception(e)
            error = True
        if error:
            if data:
                # Requeue the unprocessed payload for another attempt.
                r.lpush(LIST_NAME, json.dumps(data))
                logger.error('<<<<push data back')
                logger.error(json.dumps(data))
            else:
                logger.error('<<< no data to push back')

        if os.path.exists(STOP_FLAG):
            sub_run = False
            logger.info('@@@@@@stop')


def is_sync_running(c):
    """Return True when a data-sync.py process is visible in the process
    list, excluding the grep itself and any line matching *c* (the current
    invocation's action argument)."""
    cmd = 'ps -ef | grep "python data-sync.py" | grep -v grep | grep -v %s' % c
    lines = os.popen(cmd).read().split('\n')
    # Non-empty ps output always ends with '\n', so split() leaves a
    # trailing '' element: more than one element == at least one match.
    return len(lines) > 1


if __name__ == '__main__':
    # CLI: start/stop/status the sync worker pool.
    parser = argparse.ArgumentParser()
    parser.add_argument('-action', type=str, choices=['start', 'stop', 'status'], required=False, default='start',
                        help='start or stop sync service')
    parser.add_argument('-process', type=int, required=False, default=1, help='process number, default to 1')
    args = parser.parse_args()
    if args.action == 'start':
        if is_sync_running(args.action):
            print('already running, stop it before start a new sync')
        else:
            print('start sync')
            # Clear any stale stop flag so freshly started workers do not
            # exit immediately.
            if os.path.exists(STOP_FLAG):
                os.remove(STOP_FLAG)
            run = True
            while run:
                # Spawn a fresh worker pool each round; keep restarting the
                # pool until the stop flag appears on disk.
                pool = multiprocessing.Pool(processes=args.process)
                # range works on both py2 and py3 (xrange is py2-only).
                for i in range(args.process):
                    pool.apply_async(func)
                pool.close()
                pool.join()
                if os.path.exists(STOP_FLAG):
                    run = False
                    print('!!!!stop')
                    os.remove(STOP_FLAG)
    elif args.action == 'stop':
        if not is_sync_running(args.action):
            print('not running')
        else:
            if not os.path.exists(STOP_FLAG):
                # touch the flag file; open/close is portable, unlike
                # os.mknod which can fail without privileges on some OSes.
                open(STOP_FLAG, 'a').close()
            print('signalled stop, check it by "status" argument')
    elif args.action == 'status':
        if is_sync_running(args.action):
            print('sync running')
        else:
            print('not running')