from api.handler.ce.AbstractBatchCE import AbstractBatchCE
from api.model.BatchOutputPipe import BatchOutputPipe
from api.model.FdInputPipe import FdInputPipe
from munch import Munch
# from config.annotation import Value
import json

class ClockRateHandler(AbstractBatchCE):
    """Compute per-college daily clock-in counts for teachers ('教工') and
    students ('学生') from the ``scupersonmove`` source, and upsert one
    record per college for yesterday's date.
    """

    # Maps a person's STATUSNAME to its (clocked, not-clocked) output field names.
    _COUNT_FIELDS = {
        '教工': ('teacher_clock_num', 'teacher_noclock_num'),
        '学生': ('student_clock_num', 'student_noclock_num'),
    }

    def do_compute(self, output_stream: BatchOutputPipe, source_fds: FdInputPipe, params: dict):
        """Query yesterday's distinct clock records grouped by college /
        status / clock flag, pivot them into one row per college, and emit
        each row through ``output_stream``.

        :param output_stream: pipe receiving one upsert record per college
        :param source_fds: provides access to the ``scupersonmove`` data source
        :param params: task parameters (unused by this handler)
        """
        from datetime import date, timedelta

        # Calendar arithmetic instead of ``time.time() - 86400``: subtracting
        # 86400 seconds is wrong across DST transitions, while date-based
        # subtraction always yields the previous calendar day and matches the
        # SQL's DATE_SUB(curdate(), INTERVAL 1 DAY) (assuming app and DB
        # clocks agree — TODO confirm they share a timezone).
        yesterday_date = (date.today() - timedelta(days=1)).strftime('%Y%m%d')

        fd = source_fds.get_fd('scupersonmove')

        sql = "select DEPTNAME,STATUSNAME,is_clock,count(PERSONNO) as counts FROM (SELECT DISTINCT DEPTNAME,PERSONNO,STATUSNAME,is_clock FROM scupersonmove WHERE date=DATE_SUB(curdate(),INTERVAL 1 DAY) and DEPTNAME like '%学院%'  and STATUSNAME in ('教工','学生')) as a GROUP BY DEPTNAME,STATUSNAME,is_clock"

        results = fd.query(sql)

        print(len(results))

        # Pivot: one dict per college keyed by DEPTNAME, built in a single
        # pass (the original needed three passes to pre-create the dicts).
        output_datas = {}       # 所有数据的列表 (all per-college rows)
        for result in results:
            entry = output_datas.setdefault(result.DEPTNAME, {'DEPTNAME': result.DEPTNAME})
            fields = self._COUNT_FIELDS.get(result.STATUSNAME)
            if fields is None:
                # The SQL already restricts STATUSNAME; keep the guard so an
                # unexpected status is skipped (same effect as the original
                # falling through both ifs) rather than raising.
                continue
            clock_field, noclock_field = fields
            entry[clock_field if result.is_clock else noclock_field] = result.counts or 0

        self.task_logger.log(output_datas)

        # Emit one record per college; combinations absent from the query
        # result default to 0.
        for output_data in output_datas.values():
            data = Munch()
            data.date = yesterday_date
            data.key = output_data['DEPTNAME'] + yesterday_date
            data.deptname = output_data['DEPTNAME']
            data.teacher_clock_num = output_data.get('teacher_clock_num', 0)
            data.teacher_noclock_num = output_data.get('teacher_noclock_num', 0)
            data.student_clock_num = output_data.get('student_clock_num', 0)
            data.student_noclock_num = output_data.get('student_noclock_num', 0)
            output_stream.output_for_upsert(data)





# class ShotTimesCEHandler(AbstractBatchCE):
#
#     def do_compute(self, output_stream: BatchOutputPipe, source_fds: FdInputPipe, params: dict):
#         fd = source_fds.get_fd("zt_user")
#         # self.task_logger.log(json.dumps(fd))
#
#         results = fd.query(
#             "SELECT gender,COUNT(1) AS number FROM `zt_user` "
#             " GROUP BY gender ",
#         )
#
#         self.task_logger.log(json.dumps(results))
#         if results is not None and len(results) > 0:
#
#             for result in results:
#                 tar = Munch()
#                 tar.gender = result.gender
#                 tar.number = result.number
#                 output_stream.output_for_upsert(tar)


# class Test(ISimpleScriptCE):
#     def do_compute(self, source_fds, params):
#         fd1 = source_fds.get_fd('a11230956_file')
#         results1 = fd.query(
#             "select count(id) as file_num, task_id from a11230956_file group by task_id")
#         fd2 = source_fds.get_fd('a11230959_task')
#         results2 = fd.query(
#             "select count(id) as task_num, schedule_id from a11230959_task group by schedule_id")
#         from api.model import OutputWrapper
#         return_data = {"schedule_id": None, "file_num": None}
#         bb = list()
#         for res2 in results2:
#             file_num = 0
#             for res1 in results1:
#                 bb.append(OutputWrapper.INSERT({"shedule_id": res2.schedule_id, "file_num": file_num += res1.file_num}))

